library(plotly)
## Loading required package: ggplot2
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
library(tidyverse)
## ── Attaching packages ─────────────────────────────────────────────────────── tidyverse 1.2.1 ──
## ✔ tibble  1.4.2     ✔ purrr   0.2.5
## ✔ tidyr   0.8.2     ✔ dplyr   0.7.8
## ✔ readr   1.1.1     ✔ stringr 1.3.1
## ✔ tibble  1.4.2     ✔ forcats 0.3.0
## ── Conflicts ────────────────────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks plotly::filter(), stats::filter()
## ✖ dplyr::lag()    masks stats::lag()
library(MCMCpack)
## Loading required package: coda
## Loading required package: MASS
## 
## Attaching package: 'MASS'
## The following object is masked from 'package:dplyr':
## 
##     select
## The following object is masked from 'package:plotly':
## 
##     select
## ##
## ## Markov Chain Monte Carlo Package (MCMCpack)
## ## Copyright (C) 2003-2019 Andrew D. Martin, Kevin M. Quinn, and Jong Hee Park
## ##
## ## Support provided by the U.S. National Science Foundation
## ## (Grants SES-0350646 and SES-0350613)
## ##
####################################
#
#
#Settings for the simulation
#
#
####################################


## Mixture weights for each of the four true distributions
w <- list(
  c(0.75, 0.25),
  c(0.55, 0.45),
  c(0.40, 0.30, 0.30),
  c(0.39, 0.29, 0.29, 0.03)
)

## Component means for each mixture
mu <- list(
  c(0, 3.0),
  c(0, 3.0),
  c(0, -2.0, 2.0),
  c(0, -2.0, 2.0, 10.0)
)

## Component variances for each mixture
sigma <- list(
  c(1.0, 2.0),
  c(1.0, 2.0),
  c(1.0, 2.0, 2.0),
  c(1.0, 2.0, 2.0, 1.0)
)

## Number of samples (centers) and observations nested within each
J <- 20
N <- 500

## The density for the four true mixtures, evaluated on a common grid.
## The original wrote the weighted sum out by hand four times; a loop over
## the mixtures removes the copy-paste and hoists the grid out.
x_grid <- seq(-10, 10, 0.1)

## p_density[[m]](x) = sum_k w[[m]][k] * N(x | mu[[m]][k], sigma[[m]][k]).
## sigma holds VARIANCES, hence sqrt() for the sd argument of dnorm.
p_density <- lapply(seq_along(w), function(m) {
  Reduce(`+`, lapply(seq_along(w[[m]]), function(k) {
    w[[m]][k] * dnorm(x_grid, mu[[m]][k], sqrt(sigma[[m]][k]))
  }))
})

plot_ly(x = x_grid, y = p_density[[1]], name = 'T1', type = 'scatter', mode = 'lines') %>%
  add_trace(y = p_density[[2]], name = 'T2', mode = 'lines') %>%
  add_trace(y = p_density[[3]], name = 'T3', mode = 'lines') %>%
  add_trace(y = p_density[[4]], name = 'T4', mode = 'lines') %>%
  layout(title = "True distributions used in the simulation study")
##############################################
#
#
# Generate the simulation data
#
#
###############################################


## Distribution label (1-4) for each of the J simulated centers:
## five centers per true mixture, then randomly permuted across centers
n_sample <- rep(1 : 4, each = 5)
n_sample <- n_sample[sample(1 : J, replace = FALSE)]

## group is declared but never filled anywhere in this script
## NOTE(review): kept for compatibility — confirm it is unused downstream
group <- c()

## data_store[[j]] holds the N simulated observations for center j
data_store <- list()

## Generate the simulation data. The per-component draws are collected with
## unlist(lapply(...)) instead of growing `dat` with c() inside the loop;
## the RNG call order (sample, then rnorm per component) is unchanged.
for(j in 1 : J){
  index_j <- n_sample[j]
  print(index_j)
  ## Component membership for each of the N observations in center j
  ind <- sample(1 : length(w[[index_j]]), N, replace = TRUE, prob = w[[index_j]])
  ## Draw the observations component by component (sigma holds variances)
  dat <- unlist(lapply(seq_along(w[[index_j]]), function(k) {
    rnorm(sum(ind == k), mu[[index_j]][k], sqrt(sigma[[index_j]][k]))
  }))
  data_store[[j]] <- dat
}
## [1] 1
## [1] 4
## [1] 3
## [1] 2
## [1] 2
## [1] 1
## [1] 1
## [1] 4
## [1] 4
## [1] 4
## [1] 2
## [1] 3
## [1] 4
## [1] 2
## [1] 1
## [1] 3
## [1] 3
## [1] 3
## [1] 2
## [1] 1
##############################################################################
#
#
# Hyperparameters for the model
#
#
##############################################################################

## Number of centers and observations per center
## (N is a vector here, one entry per center)
J <- 20
N <- rep(500, J)

## Truncation levels for the nested Dirichlet process:
## K center-level atoms, L patient-level atoms
K <- 35
L <- 55

## Number of indicator draws to be performed
## NOTE(review): `T` masks the built-in shorthand for TRUE and does not
## appear to be read later in this script — confirm before renaming
T <- 100

## Normal-Inverse-Gamma hyperparameters for the base measure
mu_nig <- 0
lambda_nig <- 0.01
alpha_nig <- 3
beta_nig <- 1

## Gamma hyperpriors on the concentration parameters alpha and beta
a_alpha <- 1
b_alpha <- 1
a_beta <- 1
b_beta <- 1

##############################################################################
#
#
# Initialization of the parameters
#
#
##############################################################################

## Initial center-level cluster indicator for each of the J centers
ind_center_store = list()
ind_center_store[[1]] = sample(1 : K, J, replace = TRUE, prob = 1 : K)

## Initial patient-level cluster indicator for every observation
ind_patient_store = list()
ind_patient_store[[1]] = list()
for(j in 1 : J){
  ind_patient_store[[1]][[j]] = sample(1 : L, N[j], replace = TRUE, prob = 1 : L)
}

## Center-level stick-breaking fractions u; u[K] = 1 closes the stick
u_store = list()
u_store[[1]] = rep(0.5, K)
u_store[[1]][K] = 1

## Center-level weights: pi_k = u_k * prod_{t < k} (1 - u_t).
## cumprod() replaces the original O(K^2) double loop with identical values.
pi_store = list()
pi_store[[1]] = u_store[[1]] * cumprod(c(1, 1 - u_store[[1]][-K]))

## Patient-level stick-breaking fractions v; last row = 1 closes each stick
v_store = list()
v_store[[1]] = matrix(0.5, nrow = L, ncol = K)
v_store[[1]][L, ] = 1

## Patient-level weights, per column k: w_{l,k} = v_{l,k} * prod_{t < l} (1 - v_{t,k}).
## Column-wise cumprod replaces the original O(L^2) double loop.
w_store = list()
w_store[[1]] = v_store[[1]] *
  apply(rbind(1, 1 - v_store[[1]][1 : (L - 1), , drop = FALSE]), 2, cumprod)

## Atoms (Gaussian means), one per (patient type l, center type k) pair
theta_store = list()
theta_store[[1]] = matrix(0, nrow = L, ncol = K)

## Concentration parameters; these grow into trace vectors during MCMC
alpha_store = 2
beta_store = 2

## Common kernel standard deviation (also grows into a trace vector)
psi = 1



##############################################################################
#
#
# MCMC procedure
#
#
##############################################################################

## Gibbs sampler for the truncated nested Dirichlet process mixture.
## At iteration `ite` the traces alpha_store, beta_store and psi have
## length ite - 1, so indexing with [ite - 1] reads the most recent value.
for(ite in 2 : 200){

  ## Previous iteration's state, hoisted out of the inner loops
  w_prev     = w_store[[ite - 1]]
  theta_prev = theta_store[[ite - 1]]
  pi_prev    = pi_store[[ite - 1]]
  psi_prev   = psi[ite - 1]

  ## ---- 1. Sample the cluster indicator of every medical center -----------
  ind_center_store[[ite]] = rep(0, J)
  for(j in 1 : J){
    y_j = data_store[[j]]
    log_post = rep(0, K)
    for(k in 1 : K){
      ## Mixture density of all observations in center j under center type k:
      ## dens_i = sum_l w[l, k] * N(y_i | theta[l, k], psi^2)
      comp = vapply(1 : L,
                    function(l) w_prev[l, k] * dnorm(y_j, theta_prev[l, k], psi_prev),
                    numeric(length(y_j)))
      log_post[k] = sum(log(rowSums(comp))) + log(pi_prev[k])
    }
    ## Normalize on the log scale before exponentiating (numerical stability)
    post = exp(log_post - max(log_post))
    ind_center_store[[ite]][j] = sample(1 : K, 1, prob = post / sum(post))
  }

  ## ---- 2. Sample the patient-level indicator for every observation -------
  ind_patient_store[[ite]] = list()
  for(j in 1 : J){
    k_j = ind_center_store[[ite]][j]
    y_j = data_store[[j]]
    ind_patient_store[[ite]][[j]] = rep(0, N[j])
    for(i in 1 : N[j]){
      ## dnorm(log = TRUE) instead of log(dnorm(...)) avoids log(0) = -Inf
      ## when the density underflows far in the tails
      log_p = log(w_prev[, k_j]) +
        dnorm(y_j[i], theta_prev[, k_j], psi_prev, log = TRUE)
      p = exp(log_p - max(log_p))
      ind_patient_store[[ite]][[j]][i] = sample(1 : L, 1, prob = p)
    }
  }

  ## ---- 3. Update the center-level sticks u and weights pi -----------------
  u_store[[ite]] = rep(0, K)
  for(k in 1 : (K - 1)){
    m_k        = sum(ind_center_store[[ite]] == k)
    m_larger_k = sum(ind_center_store[[ite]] > k)
    u_store[[ite]][k] = rbeta(1, 1 + m_k, alpha_store[ite - 1] + m_larger_k)
  }
  u_store[[ite]][K] = 1
  ## pi_k = u_k * prod_{t < k} (1 - u_t), vectorized with cumprod
  pi_store[[ite]] = u_store[[ite]] * cumprod(c(1, 1 - u_store[[ite]][-K]))

  ## ---- 4. Update the patient-level sticks v -------------------------------
  v_store[[ite]] = matrix(0.5, nrow = L, ncol = K)
  v_store[[ite]][L, ] = 1
  for(k in 1 : K){
    ## Centers currently assigned to center type k
    index_k = which(ind_center_store[[ite]] == k)
    for(l in 1 : (L - 1)){
      n_lk        = 0
      n_larger_lk = 0
      for(t in index_k){
        ## BUG FIX: the beta-posterior counts must come from the PATIENT
        ## indicators of the centers assigned to type k. The original compared
        ## the scalar center indicator (always k here) against l, which made
        ## the counts meaningless; the theta update below shows the intended
        ## use of ind_patient_store.
        n_lk        = n_lk        + sum(ind_patient_store[[ite]][[t]] == l)
        n_larger_lk = n_larger_lk + sum(ind_patient_store[[ite]][[t]] > l)
      }
      v_store[[ite]][l, k] = rbeta(1, 1 + n_lk, beta_store[ite - 1] + n_larger_lk)
    }
  }

  ## ---- 5. Update the patient-level weights w ------------------------------
  ## w_{l,k} = v_{l,k} * prod_{t < l} (1 - v_{t,k}), column-wise cumprod
  w_store[[ite]] = v_store[[ite]] *
    apply(rbind(1, 1 - v_store[[ite]][1 : (L - 1), , drop = FALSE]), 2, cumprod)

  ## ---- 6. Update the atoms theta ------------------------------------------
  theta_store[[ite]] = matrix(0, nrow = L, ncol = K)
  for(k in 1 : K){
    index_k = which(ind_center_store[[ite]] == k)
    for(l in 1 : L){
      ## BUG FIX: reset the sufficient statistics for every (l, k) pair.
      ## They were initialized OUTSIDE the l loop and therefore accumulated
      ## across patient types, corrupting every posterior mean after l = 1.
      n_lk = 0
      y_lk = 0
      for(t in index_k){
        sel  = ind_patient_store[[ite]][[t]] == l
        n_lk = n_lk + sum(sel)
        y_lk = y_lk + sum(data_store[[t]][sel])
      }
      ## Conjugate normal update given the kernel sd psi:
      ## mean = (y_lk + mu_nig * lambda_nig) / (n_lk + lambda_nig)
      mu_post  = (y_lk + mu_nig * lambda_nig) / (n_lk + lambda_nig)
      var_post = psi_prev^2 / (n_lk + lambda_nig)
      theta_store[[ite]][l, k] = rnorm(1, mu_post, sqrt(var_post))
    }
  }

  ## ---- 7. Sample the concentration parameters alpha and beta -------------
  ## The terminal sticks u[K] = 1 and v[L, ] = 1 are excluded so that
  ## log(1 - .) stays finite
  alpha_temp = rgamma(1, a_alpha + (K - 1),
                      b_alpha - sum(log(1 - u_store[[ite]])[1 : (K - 1)]))
  beta_temp = rgamma(1, a_beta + K * (L - 1),
                     b_beta - sum(log(1 - v_store[[ite]][1 : (L - 1), ])))
  alpha_store = c(alpha_store, alpha_temp)
  beta_store  = c(beta_store, beta_temp)

  ## ---- 8. Sample the common kernel sd psi ---------------------------------
  N_temp = sum(lengths(data_store))
  y_sum_temp = 0
  for(i in 1 : length(data_store)){
    ## theta of the component each observation is currently assigned to,
    ## extracted in one shot via matrix indexing with cbind
    th_i = theta_store[[ite]][cbind(ind_patient_store[[ite]][[i]],
                                    ind_center_store[[ite]][i])]
    y_sum_temp = y_sum_temp + sum((data_store[[i]] - th_i)^2)
  }
  alpha_psi = N_temp / 2 + K * L / 2 + (alpha_nig + 1) * L * K - 1
  beta_psi = (y_sum_temp + 2 * K * L * beta_nig +
              lambda_nig * sum((theta_store[[ite]] - mu_nig)^2)) / 2
  psi = c(psi, sqrt(rinvgamma(1, alpha_psi, beta_psi)))

  print(ite)
  print(ind_center_store[[ite]])
}
## [1] 2
##  [1] 9 3 1 2 1 2 1 1 1 5 2 2 1 1 2 2 5 3 1 1
## [1] 3
##  [1] 1 7 3 7 7 2 1 7 7 7 7 7 7 7 1 7 7 3 7 1
## [1] 4
##  [1]  7  7  3  7  7  7  7 29 29 29  7  3 29  7  7  3  3  3  7  7
## [1] 5
##  [1]  7 11 29  7  7  7  7 11 11 11  7 29 11  7  7 29 29 29  7  7
## [1] 6
##  [1]  7  2 11  7  7  7  7  2  2  2  7 11  2  7  7 11 11 11  7  7
## [1] 7
##  [1]  2  2 11  7  7  2  2  2  2  2  7 35  2  7  2 35 35 11  7  2
## [1] 8
##  [1]  2 23 35  2  2  2  2 23 23 23  2 23 23 23 23 35 35 35 23  2
## [1] 9
##  [1] 35 18 35  2  2 35 35 18 18 18  2 35 18  2 35 35 35 35  2 35
## [1] 10
##  [1] 35 22 18  2  2 35 35 22 22 21  2 18 21  2 35 18 18 18  2 35
## [1] 11
##  [1] 18 19 19  2  2 18 18 19 19 19  2 19 19  2 18 19 19 19  2 18
## [1] 12
##  [1] 18 15 19 18 18 18 18 15 15 15 18 19 15 18 18 19 19 19 18 18
## [1] 13
##  [1] 15 33 19 18 18 19 15 26 26 26 18 19 26 18 19 19 19 19 18 19
## [1] 14
##  [1] 15 19 26 15 15 19 15 21 21 21 15 26 21 15 19 26 26 26 15 19
## [1] 15
##  [1] 19  6 26 19 19 21 21  6  6  6 19  6  6 19 21 26 26 26 19 19
## [1] 16
##  [1] 21 27  6 19 19 21 21 27 27 27 19  6 27 19 21  6  6  6 19 21
## [1] 17
##  [1]  6  6 27  6  6  6  6 26 26 26  6 27 26  6  6 27 27 27  6  6
## [1] 18
##  [1] 26 19 26  6  6 26 26 19 19 19  6 19 19  6 26 19 26 26  6 26
## [1] 19
##  [1] 26  3 19  6  6 26 26  3  3  3  6 19  3  6 26 19 19 19  6 26
## [1] 20
##  [1] 26 18  3 26 26 26 26 18 18 18 26  3 18 26 26  3  3  3 26 26
## [1] 21
##  [1]  3  3 18  3  3  3  3  3  3 29  3 18 29  3  3 18 18 18  3  3
## [1] 22
##  [1]  3 29 29  3  3 29 29 23 23 23  3 29 23  3 29 29 29 29  3 29
## [1] 23
##  [1]  3 28 29  3  3 29 29 28 28 28  3 29 28  3 29 29 29 29  3 29
## [1] 24
##  [1] 28  1 29  3  3 28 28  1  1  1  3 29  1  3 28 29 29 29  3 28
## [1] 25
##  [1]  1 28 28  3  3  1  1 34 23 23  3 28 34  3  1 28 28 28  3  1
## [1] 26
##  [1]  1  8 34  3  3  1  1  8  8  8  3 28  8  3  1 34 34 34  3  1
## [1] 27
##  [1]  1  8  8  3  3  1  1 32 32 32  3  8 32  3  1  8  8  8  3  1
## [1] 28
##  [1]  1 35 32  3  3 35  1 35 35 35  3 32 35  3 32 32 32 32  3 35
## [1] 29
##  [1] 35 10 32  3  3 35 35 10 10 10  3 10 10  3 35 10 10 32  3 35
## [1] 30
##  [1] 35 17 10 35 35 10 10 17 17 17 35 10 17 35 10 10 10 10 35 10
## [1] 31
##  [1] 10 10 17 10 10 10 10 22 22 22 10 17 22 10 10 17 17 17 10 10
## [1] 32
##  [1] 10 22 22 10 10 22 10 26 26 26 10 22 26 10 22 22 22 22 10 22
## [1] 33
##  [1] 22 22 22 26 26 22 22 22 22 22 26 22 23 26 22 22 22 22 26 22
## [1] 34
##  [1] 22 29 23 22 22 22 22 29 29 29 22 23 29 22 22 23 23 23 22 22
## [1] 35
##  [1] 29  7  7 22 22 29 29  7  7  7 22  7  7 22 29  7  7  7 22 29
## [1] 36
##  [1] 7 7 7 7 7 7 7 9 9 9 7 7 9 7 7 7 7 7 7 7
## [1] 37
##  [1] 32 32 32 32 32 32  7 32 32 32 32 32 32 32 32 32 32 32 32 32
## [1] 38
##  [1]  7 32 32  7  7  7  7 17 17 17  7 32 17  7  7 32 32 32  7  7
## [1] 39
##  [1]  7 17 17  7  7  7  7  2  2  2  7 17  2  7  7 17 17 17  7  7
## [1] 40
##  [1]  7 10 17 10 10  7  7  1 32  1 10 17 10 10  7 17 17 17 10  7
## [1] 41
##  [1] 10  1  1 10 10  7 10  1  1  1 10  1  1 10  7  1  1  1 10 10
## [1] 42
##  [1] 10 23  7 10 10  7 10 23 22 23 10  7 22 10  7  7  7  7 10  7
## [1] 43
##  [1] 22 22  7 22 22 23 22 27 27 27 22 23 27 22 22  7 23  7 22 22
## [1] 44
##  [1] 27 27 23 22 22 27 27  5 27  5 22  7 33 22 27  7 23 23 22 27
## [1] 45
##  [1] 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5
## [1] 46
##  [1]  5 33 33  5  5  5  5 33 33 33  5 33 33  5  5 33 33 33  5  5
## [1] 47
##  [1] 5 5 5 5 5 5 5 2 2 2 5 5 2 5 5 5 5 5 5 5
## [1] 48
##  [1] 2 2 5 2 2 2 2 2 2 2 2 5 2 2 2 5 5 5 2 2
## [1] 49
##  [1]  2 35  5  2  2  2  2 35 35 35  2  5 35  2  2  5  5  5  2  2
## [1] 50
##  [1]  2 35 35  2  2  2  2 11 11 11  2 35 11  2  2 35 35 35  2  2
## [1] 51
##  [1] 11 15 35  2  2 11 11 15 15 15  2 35 15  2 11 35 35 35  2 11
## [1] 52
##  [1] 11 12 15  2  2 15 11 12 12 12  2 15 12  2 15 15 15 15  2 11
## [1] 53
##  [1] 12 12 12 12 12 12 12 29 29 29 12 12 29 12 12 12 12 12 12 12
## [1] 54
##  [1] 12  4 29 12 12 12 12  4  4  4 12 29  4 12 12 29 29 29 12 12
## [1] 55
##  [1]  4  4  4 23 23  4  4  4  4 34 23  4 34 23  4  4  4  4 23  4
## [1] 56
##  [1]  4  2 34 23 23  4  4  2  2  2 23 34  2 23  4 34 34 34 23  4
## [1] 57
##  [1] 34 34 34  4  4 34 34 11 11 11  4 34 11  4 34 34 34 34  4 34
## [1] 58
##  [1] 34 34 11 34 34 34 34 21 21 21 34 11  7 34 34 11 11 11 34 34
## [1] 59
##  [1] 21 21  7 21 21 21 21  9  9  9 21  7  9 21 21  7  7  7 21 21
## [1] 60
##  [1] 21 20  7 21 21  9 21 20 20 20 21  7 20 21 21  7  7  7 21 21
## [1] 61
##  [1] 21  9  9 21 21  9  9 20 20 27 21  9 27 21  9  9  9  9 21  9
## [1] 62
##  [1] 21  9 21 21 21 21 21 27  9  9 21 27  9 21 21 21 21 21 21 21
## [1] 63
##  [1]  9  9 21  9  9  9  9 27 27 27  9 21  9  9  9 21 21 21  9  9
## [1] 64
##  [1] 27 27 31 27 27 27 27 27 27 27 27 31 27 27 27 31 27 31 27 27
## [1] 65
##  [1] 27  2 31 27 27 27 27  2  2  2 27 31  2 27 27 31 31 31 27 27
## [1] 66
##  [1] 31 31  2 31 31 31 31 21 21 21 31  2  3 31 31  2  2  2 31 31
## [1] 67
##  [1] 31 32  2 31 31 20 31 32 32 32 31  2 32 31 20  2  2  2 31 20
## [1] 68
##  [1] 20 20 32 31 31 20 20 20 20 20 31 32 20 31 32 32 32 32 31 20
## [1] 69
##  [1] 20 20 32 31 31 20 20 20 20 20 31 32 20 31 20 32 32 32 31 20
## [1] 70
##  [1] 20 20 32 16 16 20 20 20 20 20 16 32 20 16 20 32 32 32 16 20
## [1] 71
##  [1] 20 20 32 20 20 20 20 26 26 26 20 32 20 20 20 32 32 32 20 20
## [1] 72
##  [1] 26 26 26 20 20 26 26 26 26 26 20 29 26 20 26 29 26 26 20 26
## [1] 73
##  [1] 29 29 29 29 29 29 29 26 26 26 29 29 26 29 29 29 29 29 29 29
## [1] 74
##  [1] 29 33 29 29 29 29 29 33 33 33 29 33 33 29 29 33 29 29 29 29
## [1] 75
##  [1] 33 33 33 33 33 33 33 12 12 12 33 33 12 33 33 33 33 33 33 33
## [1] 76
##  [1] 33 12 12 33 33 33 33 12 12 29 33 12 29 33 33 12 12 12 33 33
## [1] 77
##  [1] 29 29 12 33 33 29 29 29 29 29 33 12 29 33 29 12 12 12 33 29
## [1] 78
##  [1] 29 29 12 29 29 29 29 29 29 29 29 12 29 33 29 12 12 12 29 29
## [1] 79
##  [1] 33 33 33 29 29 33 33 18 18 18 29 33 18 29 33 33 33 33 29 33
## [1] 80
##  [1] 18 33 33 18 18 18 18 11 11 11 18 33 11 29 18 33 33 33 29 18
## [1] 81
##  [1] 18 18 11 18 18 18 18  2  2  2 18 11  2 18 18 11 11 11 18 18
## [1] 82
##  [1] 18 18 18 18 18 18 18 19 15 19 18 18 19 18 18 18 18 18 18 18
## [1] 83
##  [1] 19 18 18  8  8 19 19  8  8  8  8 18  8  8 19 18 18 18  8 19
## [1] 84
##  [1]  8 23 18 19 19  8  8 23 23 23 19 23 23 19  8 18 18 18 19  8
## [1] 85
##  [1]  8 23 23  8  8  8  8 35 35 35  8 23 35  8  8 23 23 23  8  8
## [1] 86
##  [1]  8 18 35  8  8  8  8 18 18 18  8 35 18  8  8 35 35 23  8  8
## [1] 87
##  [1]  8 19 18  8  8 18  8 19 19 19  8 18 19  8 18 18 18 18  8  8
## [1] 88
##  [1] 19 19 19 19 19 18 19 19 19 32 19 19 32 19 18 19 19 18 19 19
## [1] 89
##  [1] 18 19 19 18 18 18 18  5  5  5 18 19  5 18 18 19 19 19 18 18
## [1] 90
##  [1]  5  5  5 18 18  5 18  5  5  5 18  5  5 18  5  5  5  5 18  5
## [1] 91
##  [1]  5 11  5 18 18  5  5 11 11 11 18  5 11 18  5  5  5  5 18  5
## [1] 92
##  [1]  5  5 11  5  5  5  5  5  5  2  5 11  2  5  5 11 11 11  5  5
## [1] 93
##  [1]  5  2  2  5  5  5  5 23 23 23  5  2 23  5  5  2  2  2  5  5
## [1] 94
##  [1]  5 16 23  5  5  5  5 16 16 16  5 23 16  5 16 23 23 23  5  5
## [1] 95
##  [1] 16 20 16  5  5 16 16 20 20 20  5 20 20  5 16 20 16 16  5 16
## [1] 96
##  [1] 16 20 20  5  5 16 16 30 30 30  5 20 30  5 16 20 20 20  5 16
## [1] 97
##  [1]  5 30 30  5  5  5  5 30 30  8  5 30  8  5  5 30 30 30  5  5
## [1] 98
##  [1]  8  7 30  8  8  8  8  7  7  7  8 30  7  8  8 30  8 30  8  8
## [1] 99
##  [1]  8  7  7  8  8  7  8  3 12  3  8  7  3  8  7  7  7  7  8  8
## [1] 100
##  [1]  3  3  3  3  8  3  3 23 23 23  8  3 23  8  3  3  3  3  8  3
## [1] 101
##  [1]  3 21 23  8  8 23  3 21  3 21  8 23  3  8 23 23 23 23  8  3
## [1] 102
##  [1] 21  3 23 21 21 21 21 11 11 11 21 23  3 21 21 23 23 23 21 21
## [1] 103
##  [1] 21  6 23 21 21 23 21  6  6  6 21 23  6 21 23 23 23 23 21 21
## [1] 104
##  [1] 23 23  6 23 23 23 23 29 29 29 23  6 29 23 23  6  6  6 21 23
## [1] 105
##  [1] 23 29  6 23 23 29 23 29 29 29 23  6 29 23 23  6  6  6 23 23
## [1] 106
##  [1] 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29  6 29 29
## [1] 107
##  [1] 29 29  6 29 29 29 29 29 29 29 29  6 29 29 29  6  6  6 29 29
## [1] 108
##  [1] 29 24  6 29 29 29 29 24 24 24 29 24 24 29 29  6  6  6 29 29
## [1] 109
##  [1] 24 24  6  1  1 24 24 24 24 24  1  6 24  1 24  6  6  6  1 24
## [1] 110
##  [1] 24 24  6 24 24 24 24 24 24 24 24  6 24 24 24  6 24  6 24 24
## [1] 111
##  [1] 24 24  6 24 24 24 24 24 24 24 24  6 24 24 24  6 24  6 24 24
## [1] 112
##  [1] 24 24  6 24 24 24 24  2  2  2 24  6  2 24 24  6  6  6 24 24
## [1] 113
##  [1] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
## [1] 114
##  [1]  2  2  2  2  2  2  2 20  2 20  2  2  4  2  2  2  2  2  2  2
## [1] 115
##  [1] 2 5 4 2 2 2 2 5 5 5 2 4 5 2 2 4 4 4 2 2
## [1] 116
##  [1]  2 35  4  2  2  5  2 35 35 35  2  4 35  2  5  4  4  4  2  2
## [1] 117
##  [1]  5 21  5  2  2  5  5 21 21 21  2  5 21  2  5  5  5  5  2  5
## [1] 118
##  [1]  2 21  5  2  2 21 21  1  1  1  2  5 34  2 21  5  5  5  2 21
## [1] 119
##  [1] 34 34  5  2  2 34  1 34 34 34  2  5 34  2  1  5  5  5  2 34
## [1] 120
##  [1] 34 34  5 34 34 34  1 34 34 34 34  5 34 34 34  5  5  5 34 34
## [1] 121
##  [1]  1 34  5 34 34  1  1  9  9  9 34  5  9 34  1  5  5  5 34  1
## [1] 122
##  [1]  1 29  5  1  1  1  1 29 29 29  1  5 29  1  1  5  5  5  1  1
## [1] 123
##  [1]  1 29 29  1  1 29  1 20 20 20  1 29 20  1 29 29 29 29  1 29
## [1] 124
##  [1]  1 20 20  1  1 20 20 25 25 25  1 20 25  1 20 20 20 20  1 20
## [1] 125
##  [1] 25 25 20  1  1 25 25 25 25 14  1 20 14  1 25 20 20 20  1 25
## [1] 126
##  [1] 25 14 14 25 25 25 25 27 27 27 25 14 27  1 25 14 14 14 25 25
## [1] 127
##  [1]  1 27 14 25 25 27  1 30 27 30 25 14 30 25 27 14 14 14 25 25
## [1] 128
##  [1] 27 27 30 25 25 27  1 27 27 27 25 30 27 25 27 30 30 30 27 27
## [1] 129
##  [1] 25 27 27 27 27 25 25 21 21 21 27 27 21 27 25 27 27 27 27 25
## [1] 130
##  [1] 21 27 27 21 21 21 21 27 21 20 21 27  3 21 21 27 27 27 21 21
## [1] 131
##  [1] 20 20 20 21 21 20 20 20 20 20 21 27 20 21 20 27 20 27 21 20
## [1] 132
##  [1] 20 20 20 21 21 20 20 12 12 12 21 20 12 21 20 20 20 20 21 20
## [1] 133
##  [1] 12 12 20 12 12 20 20 34 34 34 12 20 34 12 12 20 20 20 12 12
## [1] 134
##  [1] 34 19 20 12 12 34 34 19 19 19 12 20 19 12 34 20 20 20 12 34
## [1] 135
##  [1] 34 27 27 19 19 34 34 27 27 27 19 27 27 19 34 19 19 27 19 34
## [1] 136
##  [1] 34 35 27 19 19 34 34 35 35 35 19 27 35 19 34 27 27 27 19 34
## [1] 137
##  [1] 35 35 35 35 35 35 35 26 26 26 35 35 26 19 35 35 35 35 19 35
## [1] 138
##  [1] 35 19 19 35 35 26 35 19 19 20 35 19 20 35 19 19 19 19 35 35
## [1] 139
##  [1] 26 20 20 26 26 26 26 20 20 12 26 20 12 35 26 20 20 20 26 26
## [1] 140
##  [1] 26 23 20 26 26 12 12 23 23 23 26 20 23 26 12 20 12 20 26 12
## [1] 141
##  [1] 23 23 12 23 23 23 23 23 23 23 23 12 23 23 23 12 12 12 23 23
## [1] 142
##  [1] 23 23 12 23 23 23 23 23 29 29 23 12 29 23 23 12 12 12 23 23
## [1] 143
##  [1] 29 11 11 23 23 29 29 11 29 11 23 11 29 23 29 11 11 11 23 29
## [1] 144
##  [1] 29 11 11 29 29 11 29 33 33 33 29 11 29 29 11 11 11 11 29 29
## [1] 145
##  [1] 11 33 11 29 29 11 11 33 33 33 29 11 33 29 11 11 11 11 29 11
## [1] 146
##  [1]  2  2  2  2  2  2 11  2  2  2  2  2  2  2  2  2  2 11  2  2
## [1] 147
##  [1]  2  2 11  2  2  2  2 23 23 23  2 11 23  2  2 11 11 11  2  2
## [1] 148
##  [1] 23 23  2 23 23 23 23 23 23 23 23  2 35 23 23  2  2  2 23 23
## [1] 149
##  [1] 23 21  2 23 23 23 23 21 21 21 23 35 21 23 23 35 35  2 23 23
## [1] 150
##  [1] 21 21 35 21 21 21 21 10 10 10 21 35 10 21 21 35 35 35 21 21
## [1] 151
##  [1] 21 10 10 21 21 21 21 14 14 14 21 10 14 21 21 10 10 10 21 21
## [1] 152
##  [1] 14 11 14 21 21 14 14 11  7 11 21 14  7 21 14 14 14 14 21 14
## [1] 153
##  [1]  7  7 14 21 21  7  7 17 17 17 21 14 17 21  7 14 14 14 21  7
## [1] 154
##  [1] 17  6 17 17 17 17 17  6  6  6 17  6  6  6 17  6 17  7  6 17
## [1] 155
##  [1] 17  6  6 17 17 17 17 29 29 29 17  6 29 17 17  6  6  6 17 17
## [1] 156
##  [1] 29 29  6 17 17 29 29 29 29 29 17  6 29 17 29  6  6  6 17 29
## [1] 157
##  [1] 29 31  6 29 29 29 29 31 31 31 29 31 31 29 29  6  6  6 29 29
## [1] 158
##  [1] 31 31 31 29 29 31 31 13 13 13 29 31 13 29 31 31 31 31 29 31
## [1] 159
##  [1] 31 32 13 31 31 31 31 32 32 32 31 13 32 31 31 13 13 13 31 31
## [1] 160
##  [1] 31 32 32 31 31 31 31 18 18 18 31 32 18 31 31 32 32 32 31 31
## [1] 161
##  [1] 31 18 32 31 31 31 31 31 31  4 31 32  4 31 31 32 32 32 31 31
## [1] 162
##  [1] 18 18  4 31 31 18 18  9  9  9 31  4  9 31 18  4  4  4 31 18
## [1] 163
##  [1]  9  9  4  9  9  9  9 17 17 17  9  4 17  9  9  4  4  4  9  9
## [1] 164
##  [1] 17 17 17 17 17 17 17 17 17 14 17 17 14 17 17 17 17  4 17 17
## [1] 165
##  [1] 17 14  4 17 17 17 17 14 14 14 17  4 14 17 17  4  4  4 17 17
## [1] 166
##  [1] 14 14  4 14 14 14 14 14 14 14 14  4 14 14 14  4  4  4 14 14
## [1] 167
##  [1]  4  4  4 14 14  4  4  8  8  8 14  4 14 14  4  4  4  4 14  4
## [1] 168
##  [1] 4 8 8 8 8 4 4 8 8 8 8 8 8 8 4 8 8 8 8 4
## [1] 169
##  [1] 4 8 4 8 8 4 4 8 8 8 8 4 8 8 4 4 4 4 8 4
## [1] 170
##  [1]  4  4  4  8  8  4  4 30 30 30  8  4  7  8  4  4  4  4  8  4
## [1] 171
##  [1] 30 30  4 30 30 30 30 30 30 30 30  4 30 30 30  7  4  4 30 30
## [1] 172
##  [1]  4  4  4 30 30  4  4  4  4  8 30  4 30 30  4  4  4  4 30  4
## [1] 173
##  [1]  4  4  8  4  4  4  4 25 25 25  4  8 32  4  4  8  8  8  4  4
## [1] 174
##  [1]  4 32 25  4  4  4  4  1  1  1  4 25  1  4 32 25 25 25 32 32
## [1] 175
##  [1] 32 32  1 32 32 32 32 32 32 32 32  1 32 32 32  1  1  1 32 32
## [1] 176
##  [1] 32 27  1 32 32 32 32 27 27 27 32  1 27 32 32  1  1  1 32 32
## [1] 177
##  [1] 27 28 27 27 27 27 27 28 28 28 27 27 28 27 27 27 27 27 27 27
## [1] 178
##  [1] 28 27 27 27 27 28 28  6  6  6 27 27  6 27 28 27 27 27 27 28
## [1] 179
##  [1] 27 27 27 28 28 27 27 22 22 22 28 27 22 28 27 27 27  6 28 27
## [1] 180
##  [1] 27 27 27 28 28 27 27 32 32 32 28 27 32 28 27 27 27 27 28 27
## [1] 181
##  [1] 32 32 27 28 28 32 32 32 32  9 28 27 29 28 32 27 27 27 28 32
## [1] 182
##  [1] 29  9  9 32 32 29 29  9  9  9 32  9  9 32 29  9  9  9 32 29
## [1] 183
##  [1]  9  9  9 29 29  9  9  9  9  9 29  9  9 29  9  9  9  9  9  9
## [1] 184
##  [1]  9  9  9  9  9  9  9  9  9 24  9  9 24  9  9  9  9  9  9  9
## [1] 185
##  [1]  9  9 24  9  9 24 24  9  9 13  9  9 13  9 24  9 24 24  9  9
## [1] 186
##  [1] 13 13 13 13 13 13 13 16 16 16 13  9 16 13 13  9  9 13 13 13
## [1] 187
##  [1] 13 16 16 13 13 13 13 31 31 31 13  9 31 13 13 16 16 16 13 13
## [1] 188
##  [1] 31 31 31 13 13 31 31 27 27 27 13 31 27 13 31 31 31 31 13 31
## [1] 189
##  [1] 27 27 27 13 13 27 27 27 27 27 13 27 27 13 27 31 27 27 13 27
## [1] 190
##  [1] 27 27 27 27 27 27 27 29 29 29 27 27 29 13 27 27 27 27 13 27
## [1] 191
##  [1] 27 29 29 27 27 27 27  9  9  9 27 29  9 13 27 27 29 29 13 27
## [1] 192
##  [1]  9  9  9  9  9  9  9  9  9  9  9  9  9  9  9 27  9 29  9  9
## [1] 193
##  [1]  9  9 29  9  9  9  9  9  9  9  9 29  9  9  9 29 29 29  9  9
## [1] 194
##  [1]  9  9 29  9  9  9  9  9 10 10  9 29 10  9  9 29 29 29  9  9
## [1] 195
##  [1] 10  9  9 10 10 10 10 28 28 28 10  9 28 10 10  9  9  9 10 10
## [1] 196
##  [1] 10 28 28 10 10 28 28  6  6  6 10 28  6 10 28 28 28 28 10 28
## [1] 197
##  [1]  6  6 28 10 10  6  6  6  6  6 10 28  6 10  6 28 28 28 10  6
## [1] 198
##  [1]  6  6 28  6  6  6  6  6  6  6  6 28  6  6  6 28  6 28  6  6
## [1] 199
##  [1]  6  6 28  6  6  6  6  6  6  6  6 28  6  6  6 28 28 28  6  6
## [1] 200
##  [1]  6  6  6  6  6  6  6  6  6  6  6 23  6  6  6 23  6  6  6  6
## Show the heatmap for the clustering result.
## Posterior co-clustering summary: for each pair of centers (t, s), the
## fraction of retained MCMC iterations in which they share a cluster label.
keep_iters = 30 : 174   # post-burn-in iterations used for the summary
cluster_matrix = matrix(0, nrow = J, ncol = J)

for(t in 1 : J){
  for(s in 1 : J){
    same = vapply(keep_iters,
                  function(i) ind_center_store[[i]][t] == ind_center_store[[i]][s],
                  logical(1))
    ## BUG FIX: divide by the number of retained iterations — the original
    ## hard-coded 14 while the loop spans 145 iterations, which produced
    ## "proportions" far greater than 1
    cluster_matrix[t, s] = mean(same)
  }
}

## Heatmap of the implied dissimilarity 1 - P(co-clustered)
heatmap(1 - cluster_matrix)